#map()
map(ord, 'The quick brown fox')  # lazy: returns a map object, no ord() call happens yet
map  # map is a class (type), not a plain function
m = map(ord, 'turtles all the way down')
isinstance(m, map)  # True: calling map() constructs a map instance

list(m)  # forcing the iterator finally evaluates ord() for every character

#Tracing map execution
from trace_decorator import Trace #from chapter 3: project-local decorator that logs each call
result = map(Trace()(ord), 'The quick brown fox')
result  # still lazy: no trace output has been printed yet
next(result)  # each next() pulls one value, triggering exactly one traced ord() call
next(result)
next(result)

list(map(ord, 'The quick brown fox'))  # list() drives the whole iterator at once

for o in map(ord, 'The quick brown fox'):
    print(o)  # for-loops likewise consume map lazily, one element per iteration

#Multiple input sequences
# Three equal-length lists used to demonstrate map() over several iterables at once.
sizes = ['small', 'medium', 'large']
colors = ['lavender', 'teal', 'burnt orange']
animals = ['koala', 'platypus', 'salamander']
def combine(size, color, animal):
    """Build a 'size color animal' description string from the three parts."""
    return f'{size} {color} {animal}'

list(map(combine, sizes, colors, animals))  # map() takes one element from each sequence per call to combine


#Inputs of different sizes
def combine(quantity, size, color, animal):
    """Build a 'quantity x size color animal' description string."""
    return f'{quantity} x {size} {color} {animal}'

import itertools
list(map(combine, itertools.count(), sizes, colors, animals))  # safe: map stops at the shortest input, so the infinite count() is truncated


#map() versus comprehensions
[str(i) for i in range(5)]  # list comprehension: eager, builds the list immediately
list(map(str, range(5)))  # same result via map + list

i = (str(i) for i in range(5))  # generator expression: lazy
list(i)

i = map(str, range(5))  # map object: also lazy, equivalent to the generator above
list(i)


#filter()
positives = filter(lambda x: x > 0, [1, -5, 0, 6, -2,8])  # keeps only elements for which the predicate is true
positives  # lazy: a filter object, nothing evaluated yet
list(positives)

trues = filter(None, [0, 1, False, True, [], [1, 2, 3], '', 'hello'])  # a None predicate keeps the truthy elements
list(trues)


#functools.reduce()
from functools import reduce
import operator 
reduce(operator.add, [1, 2, 3, 4, 5])  # left fold with +: ((((1+2)+3)+4)+5)

# Hand-written loop equivalent to the reduce() call above.
numbers = [1, 2, 3, 4, 5] 
accumulator = numbers[0] 
for item in numbers[1:]: 
    accumulator = operator.add(accumulator, item)
accumulator

def mul(x, y):
    """Return x * y, echoing the operands so reduce()'s fold order is visible."""
    product = x * y
    print(f'mul {x} {y}')
    return product

reduce(mul, range(1, 10))  # prints each intermediate step; result is 9! = 362880


#reduce() details
reduce(mul, []) #error: TypeError — empty sequence with no initial value
reduce(mul, [1]) #doesn't do anything: a single element is returned without calling mul 

#initial value
values = [1, 2, 3]
reduce(operator.add, values, 0) #initial value 0 seeds the fold before the first element

values = []
reduce(operator.add, values, 0) #no error: the initial value is returned for an empty sequence

values = [1, 2, 3] 
reduce(operator.mul, values, 1) #use 1 (the multiplicative identity) for multiplication

#Combining functional concepts:map-reduce
def count_words(doc):
    """Return a dict mapping each lower-cased word in *doc* to its frequency.

    Any non-alphabetic character is treated as a word separator.
    """
    # Lower-case letters, turn everything else into spaces, then split on whitespace.
    cleaned = ''.join(ch.lower() if ch.isalpha() else ' ' for ch in doc)
    tallies = {}
    for token in cleaned.split():
        try:
            tallies[token] += 1
        except KeyError:
            tallies[token] = 1
    return tallies

count_words('It was the best of times, it was the worst of times.')  # one word->frequency dict for one document

documents = [ 'It was the best of times, it was the worst of times.', 'I went to the woods because I wished to live deliberately, to front only the essential facts of life...', 'Friends, Romans, countrymen, lend me your ears; I come to bury Caesar, not to praise him.', 'I do not like green eggs and ham. I do not like them, Sam-I-Am.', ]

# The "map" step: produce one frequency dict per document.
counts = map(count_words, documents)
list(counts)

def combine_counts(d1, d2):
    """Merge two word-frequency dicts into a new dict, summing shared keys.

    Neither input dict is modified.
    """
    merged = dict(d1)
    for word in d2:
        if word in merged:
            merged[word] += d2[word]
        else:
            merged[word] = d2[word]
    return merged

# The "reduce" step: fold the per-document dicts into one grand total.
counts = map(count_words, documents)
total_counts = reduce(combine_counts, counts)
total_counts 